This commit includes a laundry list of updates and tweaks to reflect the current
API of the registry:
* `registry.host` has been renamed to `registry.index`
* New top-level manifest keys are now accepted:
* `homepage` - url
* `documentation` - url
* `repository` - url
* `description` - a markdown-less blurb
* `license` - string (verified by the registry on upload)
* `keywords` - string array
* `readme` - string pointing at a file
* Authors are now uploaded to the registry
* The upload format to the registry has changed to a JSON body payload
* Unpacking tarballs respects the executable bit for scripts and such.
* Downloading now follows redirects to go to S3.
* The download URL for a package has changed slightly.
* Verify path dependencies have a version listed when being uploaded
* Rename `upload` to `publish`
* Rename `ops::cargo_upload` to `ops::registry`
* Add a new `registry` package for interoperating with the registry
* Add the ability to modify owners via `cargo owner`
* Add a `readme` key to the manifest, and upload its contents to the registry.
* Add the ability to yank crates and their versions
* When packaging a library, verify that it builds from the packaged source by
unpacking the tarball and simulating a `cargo build` inside of it.
version = "0.0.1-pre"
dependencies = [
"curl 0.0.1 (git+https://github.com/alexcrichton/curl-rust?ref=bundle#36b015de91daf6310227cec04ef30acf5929dfb6)",
- "docopt 0.6.4 (git+https://github.com/docopt/docopt.rs#4544a9f422b115c2ffef4ee9baf27ceb07c34602)",
+ "docopt 0.6.5 (git+https://github.com/docopt/docopt.rs#1c9a63e0362848570f7c503f8891770ebf2d1eab)",
"flate2 0.0.1 (git+https://github.com/alexcrichton/flate2-rs#68971ae77a523c7ec3f19b4bcd195f76291ea390)",
- "git2 0.0.1 (git+https://github.com/alexcrichton/git2-rs#7d7fba10893590793ae88c8fc6ab2aeffcb8f10b)",
+ "git2 0.0.1 (git+https://github.com/alexcrichton/git2-rs#81cc81ac8daa4fdfc78d75385bb938425a632dcb)",
"glob 0.0.1 (git+https://github.com/rust-lang/glob#469a6bc1a0fc289ab220170e691cffbc01dcf1e6)",
"hamcrest 0.1.0 (git+https://github.com/carllerche/hamcrest-rust.git#7d46e76514ac606530dfb0e93270fffcf64ca76d)",
- "semver 0.1.0 (git+https://github.com/rust-lang/semver#9bb8265ea6cf01eddfa7dc5ec9334883443e9fc7)",
- "tar 0.0.1 (git+https://github.com/alexcrichton/tar-rs#943d7c0173c7fa5e8c58978add0180569c68d7f7)",
+ "registry 0.0.1-pre",
+ "semver 0.1.0 (git+https://github.com/rust-lang/semver#0eee1b33e90a62ed03a123b94c8e06cdbaf5b662)",
+ "tar 0.0.1 (git+https://github.com/alexcrichton/tar-rs#c477f1ca1b6dde36ebf1f4a739033fe485895722)",
"toml 0.1.0 (git+https://github.com/alexcrichton/toml-rs#8a3ba4c65cfa22a3d924293a1fb3a70bfac5e66c)",
"url 0.1.0 (git+https://github.com/servo/rust-url#7f1991d847fb8cf8648def2ff121bae90b89131f)",
]
[[package]]
name = "docopt"
-version = "0.6.4"
-source = "git+https://github.com/docopt/docopt.rs#4544a9f422b115c2ffef4ee9baf27ceb07c34602"
+version = "0.6.5"
+source = "git+https://github.com/docopt/docopt.rs#1c9a63e0362848570f7c503f8891770ebf2d1eab"
[[package]]
name = "encoding"
[[package]]
name = "git2"
version = "0.0.1"
-source = "git+https://github.com/alexcrichton/git2-rs#7d7fba10893590793ae88c8fc6ab2aeffcb8f10b"
+source = "git+https://github.com/alexcrichton/git2-rs#81cc81ac8daa4fdfc78d75385bb938425a632dcb"
dependencies = [
- "libgit2 0.0.1 (git+https://github.com/alexcrichton/git2-rs#7d7fba10893590793ae88c8fc6ab2aeffcb8f10b)",
+ "libgit2 0.0.1 (git+https://github.com/alexcrichton/git2-rs#81cc81ac8daa4fdfc78d75385bb938425a632dcb)",
"url 0.1.0 (git+https://github.com/servo/rust-url#7f1991d847fb8cf8648def2ff121bae90b89131f)",
]
[[package]]
name = "libgit2"
version = "0.0.1"
-source = "git+https://github.com/alexcrichton/git2-rs#7d7fba10893590793ae88c8fc6ab2aeffcb8f10b"
+source = "git+https://github.com/alexcrichton/git2-rs#81cc81ac8daa4fdfc78d75385bb938425a632dcb"
dependencies = [
"libssh2-static-sys 0.0.1 (git+https://github.com/alexcrichton/libssh2-static-sys#80e71a3021618eb05656c58fb7c5ef5f12bc747f)",
"link-config 0.0.1 (git+https://github.com/alexcrichton/link-config#0202cc8aa74a7b0bdbaef4eef368d0bc80f63691)",
version = "0.0.1"
source = "git+https://github.com/alexcrichton/openssl-static-sys#d218fa63aabefb3ac56a44985e2df8a2dc932450"
+[[package]]
+name = "registry"
+version = "0.0.1-pre"
+dependencies = [
+ "curl 0.0.1 (git+https://github.com/alexcrichton/curl-rust?ref=bundle#36b015de91daf6310227cec04ef30acf5929dfb6)",
+]
+
[[package]]
name = "semver"
version = "0.1.0"
-source = "git+https://github.com/rust-lang/semver#9bb8265ea6cf01eddfa7dc5ec9334883443e9fc7"
+source = "git+https://github.com/rust-lang/semver#0eee1b33e90a62ed03a123b94c8e06cdbaf5b662"
[[package]]
name = "tar"
version = "0.0.1"
-source = "git+https://github.com/alexcrichton/tar-rs#943d7c0173c7fa5e8c58978add0180569c68d7f7"
+source = "git+https://github.com/alexcrichton/tar-rs#c477f1ca1b6dde36ebf1f4a739033fe485895722"
[[package]]
name = "toml"
[dependencies.glob]
git = "https://github.com/rust-lang/glob"
+[dependencies.registry]
+path = "src/registry"
+
[[bin]]
name = "cargo"
test = false
$macro!(locate_project)
$macro!(login)
$macro!(new)
+ $macro!(owner)
$macro!(package)
$macro!(pkgid)
+ $macro!(publish)
$macro!(read_manifest)
$macro!(run)
$macro!(test)
$macro!(update)
- $macro!(upload)
$macro!(verify_project)
$macro!(version)
+ $macro!(yank)
}) )
/**
};
let token = token.as_slice().trim().to_string();
- try!(ops::upload_login(shell, token).map_err(|e| {
+ try!(ops::registry_login(shell, token).map_err(|e| {
CliError::from_boxed(e, 101)
}));
Ok(None)
--- /dev/null
+use cargo::ops;
+use cargo::core::MultiShell;
+use cargo::util::{CliResult, CliError};
+use cargo::util::important_paths::find_root_manifest_for_cwd;
+
+// Command-line options for `cargo owner`, decoded by docopt; field names
+// correspond to the flags/positionals declared in the USAGE string.
+#[deriving(Decodable)]
+struct Options {
+ // Crate whose owner set is being modified; None means the current package.
+ arg_crate: Option<String>,
+ // API token supplied on the command line, overriding any configured token.
+ flag_token: Option<String>,
+ // Logins to add as owners (-a/--add).
+ flag_add: Option<Vec<String>>,
+ // Logins to remove from the owner set (-r/--remove).
+ flag_remove: Option<Vec<String>>,
+ // Registry index to operate against (--index).
+ flag_index: Option<String>,
+ // Enable verbose shell output (-v/--verbose).
+ flag_verbose: bool,
+}
+
+// Docopt help/usage text for `cargo owner`; also drives option decoding above.
+pub const USAGE: &'static str = "
+Manage the owners of a crate on the registry
+
+Usage:
+ cargo owner [options] [<crate>]
+
+Options:
+ -h, --help Print this message
+ -a, --add LOGIN Login of a user to add as an owner
+ -r, --remove LOGIN Login of a user to remove as an owner
+ --index INDEX Registry index to modify owners for
+ --token TOKEN API token to use when authenticating
+ -v, --verbose Use verbose output
+
+This command will modify the owners for a package on the specified registry (or
+default). Note that owners of a package can upload new versions, yank old
+versions, and also modify the set of owners, so take caution!
+";
+
+// Entry point for `cargo owner`: resolves the current manifest and delegates
+// to ops::modify_owners, mapping any error to CLI exit code 101.
+pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
+ shell.set_verbose(options.flag_verbose);
+ // No --manifest-path flag here, so always search from the cwd.
+ let root = try!(find_root_manifest_for_cwd(None));
+ try!(ops::modify_owners(&root, shell,
+ options.arg_crate,
+ options.flag_token,
+ options.flag_index,
+ options.flag_add,
+ options.flag_remove).map_err(|e| {
+ CliError::from_boxed(e, 101)
+ }));
+ Ok(None)
+}
+
+
struct Options {
flag_verbose: bool,
flag_manifest_path: Option<String>,
+ flag_no_verify: bool,
}
pub const USAGE: &'static str = "
Options:
-h, --help Print this message
--manifest-path PATH Path to the manifest to compile
+ --no-verify Don't verify the contents by building them
-v, --verbose Use verbose output
";
pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
shell.set_verbose(options.flag_verbose);
- let Options {
- flag_manifest_path,
- ..
- } = options;
-
- let root = try!(find_root_manifest_for_cwd(flag_manifest_path.clone()));
- ops::package(&root, shell).map(|_| None).map_err(|err| {
+ let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
+ ops::package(&root, shell, !options.flag_no_verify).map(|_| None).map_err(|err| {
CliError::from_boxed(err, 101)
})
}
--- /dev/null
+use cargo::ops;
+use cargo::core::{MultiShell};
+use cargo::util::{CliResult, CliError};
+use cargo::util::important_paths::find_root_manifest_for_cwd;
+
+// Command-line options for `cargo publish`, decoded by docopt from the USAGE
+// string below.
+#[deriving(Decodable)]
+struct Options {
+ // Registry host to upload to (--host); falls back to the configured default.
+ flag_host: Option<String>,
+ // API token to authenticate with (--token).
+ flag_token: Option<String>,
+ // Explicit path to the Cargo.toml to publish (--manifest-path).
+ flag_manifest_path: Option<String>,
+ // Enable verbose shell output (-v/--verbose).
+ flag_verbose: bool,
+ // Skip the build-from-tarball verification step (--no-verify).
+ flag_no_verify: bool,
+}
+
+// Docopt help/usage text for `cargo publish`; also drives option decoding above.
+pub const USAGE: &'static str = "
+Upload a package to the registry
+
+Usage:
+ cargo publish [options]
+
+Options:
+ -h, --help Print this message
+ --host HOST Host to upload the package to
+ --token TOKEN Token to use when uploading
+ --no-verify Don't verify package tarball before publish
+ --manifest-path PATH Path to the manifest to compile
+ -v, --verbose Use verbose output
+
+";
+
+// Entry point for `cargo publish`: locates the manifest and delegates to
+// ops::publish, mapping any error to CLI exit code 101.
+pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
+ shell.set_verbose(options.flag_verbose);
+ let Options {
+ flag_token: token,
+ flag_host: host,
+ flag_manifest_path,
+ flag_no_verify: no_verify,
+ ..
+ } = options;
+
+ let root = try!(find_root_manifest_for_cwd(flag_manifest_path.clone()));
+ // Note the inversion: --no-verify on the CLI becomes verify=false in ops.
+ ops::publish(&root, shell, token, host, !no_verify).map(|_| None).map_err(|err| {
+ CliError::from_boxed(err, 101)
+ })
+}
+++ /dev/null
-use cargo::ops;
-use cargo::core::{MultiShell};
-use cargo::util::{CliResult, CliError};
-use cargo::util::important_paths::find_root_manifest_for_cwd;
-
-#[deriving(Decodable)]
-struct Options {
- flag_host: Option<String>,
- flag_token: Option<String>,
- flag_manifest_path: Option<String>,
- flag_verbose: bool,
-}
-
-pub const USAGE: &'static str = "
-Upload a package to the registry
-
-Usage:
- cargo upload [options]
-
-Options:
- -h, --help Print this message
- --host HOST Host to upload the package to
- --token TOKEN Token to use when uploading
- --manifest-path PATH Path to the manifest to compile
- -v, --verbose Use verbose output
-
-";
-
-pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
- shell.set_verbose(options.flag_verbose);
- let Options {
- flag_token: token,
- flag_host: host,
- flag_manifest_path,
- ..
- } = options;
-
- let root = try!(find_root_manifest_for_cwd(flag_manifest_path.clone()));
- ops::upload(&root, shell, token, host).map(|_| None).map_err(|err| {
- CliError::from_boxed(err, 101)
- })
-}
--- /dev/null
+use cargo::ops;
+use cargo::core::MultiShell;
+use cargo::util::{CliResult, CliError};
+use cargo::util::important_paths::find_root_manifest_for_cwd;
+
+// Command-line options for `cargo yank`, decoded by docopt from the USAGE
+// string below.
+#[deriving(Decodable)]
+struct Options {
+ // Crate to yank; None means the current package.
+ arg_crate: Option<String>,
+ // API token to authenticate with (--token).
+ flag_token: Option<String>,
+ // Version to yank or un-yank (--vers).
+ flag_vers: Option<String>,
+ // Registry index to yank from (--index).
+ flag_index: Option<String>,
+ // Enable verbose shell output (-v/--verbose).
+ flag_verbose: bool,
+ // Reverse a previous yank, restoring the version to the index (--undo).
+ flag_undo: bool,
+}
+
+// Docopt help/usage text for `cargo yank`; also drives option decoding above.
+pub static USAGE: &'static str = "
+Remove a pushed crate from the index
+
+Usage:
+ cargo yank [options] [<crate>]
+
+Options:
+ -h, --help Print this message
+ --vers VERSION The version to yank or un-yank
+ --undo Undo a yank, putting a version back into the index
+ --index INDEX Registry index to yank from
+ --token TOKEN API token to use when authenticating
+ -v, --verbose Use verbose output
+
+The yank command removes a previously pushed crate's version from the server's
+index. This command does not delete any data, and the crate will still be
+available for download via the registry's download link.
+
+Note that existing crates locked to a yanked version will still be able to
+download the yanked version to use it. Cargo will, however, not allow any new
+crates to be locked to any yanked version.
+";
+
+// Entry point for `cargo yank`: resolves the current manifest and delegates
+// to ops::yank, mapping any error to CLI exit code 101.
+pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
+ shell.set_verbose(options.flag_verbose);
+ // No --manifest-path flag here, so always search from the cwd.
+ let root = try!(find_root_manifest_for_cwd(None));
+ try!(ops::yank(&root, shell,
+ options.arg_crate,
+ options.flag_vers,
+ options.flag_token,
+ options.flag_index,
+ options.flag_undo).map_err(|e| {
+ CliError::from_boxed(e, 101)
+ }));
+ Ok(None)
+}
+
+
+
-use core::{SourceId,Summary};
+use core::{SourceId, Summary};
use semver::VersionReq;
use util::CargoResult;
name: String,
source_id: SourceId,
req: VersionReq,
+ specified_req: Option<String>,
transitive: bool,
only_match_name: bool,
pub fn parse(name: &str,
version: Option<&str>,
source_id: &SourceId) -> CargoResult<Dependency> {
- let version = match version {
+ let version_req = match version {
Some(v) => try!(VersionReq::parse(v)),
None => VersionReq::any()
};
Ok(Dependency {
only_match_name: false,
- req: version,
+ req: version_req,
+ specified_req: version.map(|s| s.to_string()),
.. Dependency::new_override(name, source_id)
})
}
optional: false,
features: Vec::new(),
default_features: true,
+ specified_req: None,
}
}
&self.req
}
+ pub fn get_specified_req(&self) -> Option<&str> {
+ self.specified_req.as_ref().map(|s| s.as_slice())
+ }
+
pub fn get_name(&self) -> &str {
self.name.as_slice()
}
#[deriving(PartialEq,Clone)]
pub struct Manifest {
summary: Summary,
- authors: Vec<String>,
targets: Vec<Target>,
target_dir: Path,
doc_dir: Path,
build: Vec<String>,
warnings: Vec<String>,
exclude: Vec<String>,
+ metadata: ManifestMetadata,
}
impl Show for Manifest {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
- write!(f, "Manifest({}, authors={}, targets={}, target_dir={}, \
+ write!(f, "Manifest({}, targets={}, target_dir={}, \
build={})",
- self.summary, self.authors, self.targets,
- self.target_dir.display(), self.build)
+ self.summary, self.targets, self.target_dir.display(),
+ self.build)
}
}
+/// General metadata about a package which is just blindly uploaded to the
+/// registry.
+///
+/// Note that many of these fields can contain invalid values such as the
+/// homepage, repository, documentation, or license. These fields are not
+/// validated by cargo itself, but rather it is up to the registry when uploaded
+/// to validate these fields. Cargo will itself accept any valid TOML
+/// specification for these values.
+#[deriving(PartialEq, Clone)]
+pub struct ManifestMetadata {
+ pub authors: Vec<String>,
+ pub keywords: Vec<String>,
+ pub license: Option<String>,
+ pub description: Option<String>, // not markdown
+ pub readme: Option<String>, // file, not contents
+ pub homepage: Option<String>, // url
+ pub repository: Option<String>, // url
+ pub documentation: Option<String>, // url
+}
+
#[deriving(PartialEq,Clone,Encodable)]
pub struct SerializedManifest {
name: String,
version: String,
dependencies: Vec<SerializedDependency>,
- authors: Vec<String>,
targets: Vec<Target>,
target_dir: String,
doc_dir: String,
dependencies: self.summary.get_dependencies().iter().map(|d| {
SerializedDependency::from_dependency(d)
}).collect(),
- authors: self.authors.clone(),
targets: self.targets.clone(),
target_dir: self.target_dir.display().to_string(),
doc_dir: self.doc_dir.display().to_string(),
impl Manifest {
pub fn new(summary: Summary, targets: Vec<Target>,
target_dir: Path, doc_dir: Path, sources: Vec<SourceId>,
- build: Vec<String>, exclude: Vec<String>) -> Manifest {
+ build: Vec<String>, exclude: Vec<String>,
+ metadata: ManifestMetadata) -> Manifest {
Manifest {
summary: summary,
- authors: Vec::new(),
targets: targets,
target_dir: target_dir,
doc_dir: doc_dir,
build: build,
warnings: Vec::new(),
exclude: exclude,
+ metadata: metadata,
}
}
self.get_summary().get_package_id().get_version()
}
- pub fn get_authors(&self) -> &[String] {
- self.authors.as_slice()
- }
-
pub fn get_dependencies(&self) -> &[Dependency] {
self.get_summary().get_dependencies()
}
pub fn get_exclude(&self) -> &[String] {
self.exclude.as_slice()
}
+
+ pub fn get_metadata(&self) -> &ManifestMetadata { &self.metadata }
+
+ pub fn set_summary(&mut self, summary: Summary) {
+ self.summary = summary;
+ }
}
impl Target {
name: String,
version: String,
dependencies: Vec<SerializedDependency>,
- authors: Vec<String>,
targets: Vec<Target>,
manifest_path: String,
}
dependencies: summary.get_dependencies().iter().map(|d| {
SerializedDependency::from_dependency(d)
}).collect(),
- authors: manifest.get_authors().to_vec(),
targets: manifest.get_targets().to_vec(),
manifest_path: self.manifest_path.display().to_string()
}.encode(s)
/// represents a local path
PathKind,
/// represents the central registry
- RegistryKind
+ RegistryKind,
}
type Error = Box<CargoError + Send>;
Ok(SourceId::for_registry(&try!(RegistrySource::url())))
}
- pub fn get_url(&self) -> &Url {
- &self.inner.url
- }
-
- pub fn get_kind(&self) -> &SourceKind {
- &self.inner.kind
- }
-
- pub fn is_path(&self) -> bool {
- self.inner.kind == PathKind
- }
+ pub fn get_url(&self) -> &Url { &self.inner.url }
+ pub fn get_kind(&self) -> &SourceKind { &self.inner.kind }
+ pub fn is_path(&self) -> bool { self.inner.kind == PathKind }
+ pub fn is_registry(&self) -> bool { self.inner.kind == RegistryKind }
pub fn is_git(&self) -> bool {
match self.inner.kind {
extern crate url;
#[cfg(test)] extern crate hamcrest;
+extern crate registry;
+
use std::os;
use std::io::stdio::{stdout_raw, stderr_raw};
use std::io::{mod, stdout, stderr};
use std::collections::HashMap;
use core::registry::PackageRegistry;
-use core::{MultiShell, Source, SourceId, PackageSet, Target, PackageId};
+use core::{MultiShell, Source, SourceId, PackageSet, Package, Target, PackageId};
use core::resolver;
use ops;
use sources::{PathSource};
pub fn compile(manifest_path: &Path,
options: &mut CompileOptions)
-> CargoResult<ops::Compilation> {
- let CompileOptions { env, ref mut shell, jobs, target, spec,
- dev_deps, features, no_default_features } = *options;
- let target = target.map(|s| s.to_string());
- let features = features.iter().flat_map(|s| {
- s.as_slice().split(' ')
- }).map(|s| s.to_string()).collect::<Vec<String>>();
-
log!(4, "compile; manifest-path={}", manifest_path.display());
- if spec.is_some() && (no_default_features || features.len() > 0) {
- return Err(human("features cannot be modified when the main package \
- is not being built"))
- }
-
let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));
try!(source.update());
debug!("loaded package; package={}", package);
for key in package.get_manifest().get_warnings().iter() {
- try!(shell.warn(key))
+ try!(options.shell.warn(key))
+ }
+ compile_pkg(&package, options)
+}
+
+pub fn compile_pkg(package: &Package, options: &mut CompileOptions)
+ -> CargoResult<ops::Compilation> {
+ let CompileOptions { env, ref mut shell, jobs, target, spec,
+ dev_deps, features, no_default_features } = *options;
+ let target = target.map(|s| s.to_string());
+ let features = features.iter().flat_map(|s| {
+ s.as_slice().split(' ')
+ }).map(|s| s.to_string()).collect::<Vec<String>>();
+
+ if spec.is_some() && (no_default_features || features.len() > 0) {
+ return Err(human("features cannot be modified when the main package \
+ is not being built"))
}
let user_configs = try!(config::all_configs(os::getcwd()));
let override_ids = try!(source_ids_from_config(&user_configs,
- manifest_path.dir_path()));
+ package.get_root()));
let (packages, resolve_with_overrides, sources) = {
let mut config = try!(Config::new(*shell, jobs, target.clone()));
// First, resolve the package's *listed* dependencies, as well as
// downloading and updating all remotes and such.
- try!(ops::resolve_and_fetch(&mut registry, &package));
+ try!(ops::resolve_and_fetch(&mut registry, package));
// Second, resolve with precisely what we're doing. Filter out
// transitive dependencies if necessary, specify features, handle
let pkgid = try!(resolve_with_overrides.query(spec));
packages.iter().find(|p| p.get_package_id() == pkgid).unwrap()
}
- None => &package,
+ None => package,
};
let targets = to_build.get_targets().iter().filter(|target| {
-use std::io::File;
+use std::io::{fs, File};
use std::io::fs::PathExtensions;
use std::path;
use tar::Archive;
use flate2::{GzBuilder, BestCompression};
+use flate2::reader::GzDecoder;
-use core::source::Source;
-use core::{Package, MultiShell};
+use core::source::{Source, SourceId};
+use core::{Package, MultiShell, Summary, Dependency};
use sources::PathSource;
use util::{CargoResult, human, internal, ChainError, Require};
+use ops;
+
+// Drop guard that deletes the wrapped file path unless it is defused by
+// `take()`-ing the path out first. Used below so a partially-built package
+// tarball is cleaned up on any early error return.
+struct Bomb { path: Option<Path> }
+
+impl Drop for Bomb {
+ fn drop(&mut self) {
+ match self.path.as_ref() {
+ // Best-effort delete; errors during cleanup are deliberately ignored.
+ Some(path) => { let _ = fs::unlink(path); }
+ None => {}
+ }
+ }
+}
pub fn package(manifest_path: &Path,
- shell: &mut MultiShell) -> CargoResult<Path> {
+ shell: &mut MultiShell,
+ verify: bool) -> CargoResult<Path> {
let mut src = try!(PathSource::for_path(&manifest_path.dir_path()));
try!(src.update());
let pkg = try!(src.get_root_package());
let dst = pkg.get_manifest_path().dir_path().join(filename);
if dst.exists() { return Ok(dst) }
+ let mut bomb = Bomb { path: Some(dst.clone()) };
+
try!(shell.status("Packaging", pkg.get_package_id().to_string()));
try!(tar(&pkg, &src, shell, &dst).chain_error(|| {
human("failed to prepare local package for uploading")
}));
- Ok(dst)
+ if verify {
+ try!(run_verify(&pkg, shell, &dst).chain_error(|| {
+ human("failed to verify package tarball")
+ }))
+ }
+ Ok(bomb.path.take().unwrap())
}
fn tar(pkg: &Package, src: &PathSource, shell: &mut MultiShell,
try!(ar.finish());
Ok(())
}
+
+// Verify a freshly-created package tarball by unpacking it into
+// `target/package` and compiling the unpacked sources. Path dependencies are
+// rewritten to registry dependencies first, mirroring what the registry will
+// see when the crate is uploaded.
+//
+// Fix: `&registry` had been corrupted into `®istry` (mojibake of the
+// HTML entity for `&reg`), which is not valid Rust; the reference operator
+// is restored below.
+fn run_verify(pkg: &Package, shell: &mut MultiShell, tar: &Path)
+ -> CargoResult<()> {
+ try!(shell.status("Verifying", pkg));
+
+ let f = try!(GzDecoder::new(try!(File::open(tar))));
+ let dst = pkg.get_root().join("target/package");
+ // Start from a clean slate so stale files can't mask packaging bugs.
+ if dst.exists() {
+ try!(fs::rmdir_recursive(&dst));
+ }
+ let mut archive = Archive::new(f);
+ try!(archive.unpack(&dst));
+ let manifest_path = dst.join(format!("{}-{}/Cargo.toml", pkg.get_name(),
+ pkg.get_version()));
+
+ // When packages are uploaded to the registry, all path dependencies are
+ // implicitly converted to registry-based dependencies, so we rewrite those
+ // dependencies here.
+ let registry = try!(SourceId::for_central());
+ let new_deps = pkg.get_dependencies().iter().map(|d| {
+ if !d.get_source_id().is_path() { return d.clone() }
+ Dependency::parse(d.get_name(), d.get_specified_req(), &registry)
+ .unwrap()
+ .transitive(d.is_transitive())
+ .features(d.get_features().to_vec())
+ .default_features(d.uses_default_features())
+ .optional(d.is_optional())
+ }).collect::<Vec<_>>();
+ let new_summary = Summary::new(pkg.get_package_id().clone(),
+ new_deps,
+ pkg.get_summary().get_features().clone());
+ let mut new_manifest = pkg.get_manifest().clone();
+ new_manifest.set_summary(new_summary.unwrap());
+ let new_pkg = Package::new(new_manifest,
+ &manifest_path,
+ pkg.get_package_id().get_source_id());
+
+ // Now that we've rewritten all our path dependencies, compile it!
+ try!(ops::compile_pkg(&new_pkg, &mut ops::CompileOptions {
+ env: "compile",
+ shell: shell,
+ jobs: None,
+ target: None,
+ dev_deps: false,
+ features: [],
+ no_default_features: false,
+ spec: None,
+ }));
+
+ Ok(())
+}
let (pkg, nested) = try!(read_package(&manifest, source_id));
let mut ret = vec![pkg];
- for p in nested.iter() {
- ret.extend(try!(read_nested_packages(&path.join(p),
- source_id,
- visited)).into_iter());
+ // Registry sources are not allowed to have `path=` dependencies because
+ // they're all translated to actual registry dependencies.
+ if !source_id.is_registry() {
+ for p in nested.iter() {
+ ret.extend(try!(read_nested_packages(&path.join(p),
+ source_id,
+ visited)).into_iter());
+ }
}
Ok(ret)
+++ /dev/null
-use std::collections::HashMap;
-use std::io::File;
-use std::os;
-use std::str;
-use serialize::json::{mod, ToJson};
-
-use curl::http;
-use git2;
-
-use core::source::Source;
-use core::{Package, MultiShell, SourceId, RegistryKind};
-use ops;
-use sources::{PathSource, RegistrySource};
-use util::config;
-use util::{CargoResult, human, internal, ChainError, Require, ToUrl};
-use util::config::{Config, Table};
-
-pub struct UploadConfig {
- pub host: Option<String>,
- pub token: Option<String>,
-}
-
-pub fn upload(manifest_path: &Path,
- shell: &mut MultiShell,
- token: Option<String>,
- host: Option<String>) -> CargoResult<()> {
- let mut src = try!(PathSource::for_path(&manifest_path.dir_path()));
- try!(src.update());
- let pkg = try!(src.get_root_package());
-
- // Parse all configuration options
- let UploadConfig { token: token_config, .. } = try!(upload_configuration());
- let token = try!(token.or(token_config).require(|| {
- human("no upload token found, please run `cargo login`")
- }));
- let host = host.unwrap_or(try!(RegistrySource::url()).to_string());
- let host = try!(host.as_slice().to_url().map_err(human));
- let upload = {
- let sid = SourceId::new(RegistryKind, host.clone());
- let mut config = try!(Config::new(shell, None, None));
- let mut src = RegistrySource::new(&sid, &mut config);
- try!(src.update().chain_error(|| {
- human(format!("Failed to update registry {}", host))
- }));
- (try!(src.config())).upload
- };
-
- // First, prepare a tarball
- let tarball = try!(ops::package(manifest_path, shell));
- let tarball = try!(File::open(&tarball));
-
- // Upload said tarball to the specified destination
- try!(shell.status("Uploading", pkg.get_package_id().to_string()));
- try!(transmit(&pkg, tarball, token.as_slice(),
- upload.as_slice()).chain_error(|| {
- human(format!("failed to upload package to registry: {}", upload))
- }));
-
- Ok(())
-}
-
-fn transmit(pkg: &Package, mut tarball: File,
- token: &str, host: &str) -> CargoResult<()> {
- let stat = try!(tarball.stat());
- let url = try!(host.to_url().map_err(human));
- let registry_src = SourceId::for_registry(&url);
-
- let mut handle = try!(http_handle());
- let mut req = handle.put(host, &mut tarball)
- .content_length(stat.size as uint)
- .content_type("application/x-tar")
- .header("Content-Encoding", "x-gzip")
- .header("X-Cargo-Auth", token)
- .header("X-Cargo-Pkg-Name", pkg.get_name())
- .header("X-Cargo-Pkg-Version",
- pkg.get_version().to_string().as_slice());
-
- let mut dep_header = String::new();
- for (i, dep) in pkg.get_dependencies().iter().enumerate() {
- if !dep.is_transitive() { continue }
- if dep.get_source_id() != ®istry_src {
- return Err(human(format!("All dependencies must come from the \
- same registry.\nDependency `{}` comes \
- from {} instead", dep.get_name(),
- dep.get_source_id())))
- }
-
- // See Registry::parse_registry_dependency for format
- let opt = if dep.is_optional() {"-"} else {""};
- let default = if dep.uses_default_features() {""} else {"*"};
- let features = dep.get_features().connect(",");
- let header = format!("{}{}{}|{}|{}", opt, default, dep.get_name(),
- features, dep.get_version_req());
- if i > 0 { dep_header.push_str(";"); }
- dep_header.push_str(header.as_slice());
- }
- req = req.header("X-Cargo-Pkg-Dep", dep_header.as_slice());
-
- let feature_header = pkg.get_summary().get_features().to_json().to_string();
- req = req.header("X-Cargo-Pkg-Feature", feature_header.as_slice());
-
- let response = try!(req.exec());
-
- if response.get_code() == 0 { return Ok(()) } // file upload url
- if response.get_code() != 200 {
- return Err(internal(format!("failed to get a 200 response: {}",
- response)))
- }
-
- let body = try!(str::from_utf8(response.get_body()).require(|| {
- internal("failed to get a utf-8 response")
- }));
-
- #[deriving(Decodable)]
- struct Response { ok: bool }
- #[deriving(Decodable)]
- struct BadResponse { error: String }
- let json = try!(json::decode::<Response>(body));
- if json.ok { return Ok(()) }
-
- let json = try!(json::decode::<BadResponse>(body));
- Err(human(format!("failed to upload `{}`: {}", pkg, json.error)))
-}
-
-pub fn upload_configuration() -> CargoResult<UploadConfig> {
- let configs = try!(config::all_configs(os::getcwd()));
- let registry = match configs.find_equiv(&"registry") {
- None => return Ok(UploadConfig { host: None, token: None }),
- Some(registry) => try!(registry.table().chain_error(|| {
- internal("invalid configuration for the key `registry`")
- })),
- };
- let host = match registry.find_equiv(&"host") {
- None => None,
- Some(host) => {
- Some(try!(host.string().chain_error(|| {
- internal("invalid configuration for key `host`")
- })).ref0().to_string())
- }
- };
- let token = match registry.find_equiv(&"token") {
- None => None,
- Some(token) => {
- Some(try!(token.string().chain_error(|| {
- internal("invalid configuration for key `token`")
- })).ref0().to_string())
- }
- };
- Ok(UploadConfig { host: host, token: token })
-}
-
-/// Create a new HTTP handle with appropriate global configuration for cargo.
-pub fn http_handle() -> CargoResult<http::Handle> {
- Ok(match try!(http_proxy()) {
- Some(proxy) => http::handle().proxy(proxy),
- None => http::handle(),
- })
-}
-
-/// Find a globally configured HTTP proxy if one is available.
-///
-/// Favor cargo's `http.proxy`, then git's `http.proxy`, then finally a
-/// HTTP_PROXY env var.
-pub fn http_proxy() -> CargoResult<Option<String>> {
- let configs = try!(config::all_configs(os::getcwd()));
- match configs.find_equiv(&"http") {
- Some(http) => {
- let http = try!(http.table().chain_error(|| {
- internal("invalid configuration for the key `http`")
- }));
- match http.find_equiv(&"proxy") {
- Some(proxy) => {
- return Ok(Some(try!(proxy.string().chain_error(|| {
- internal("invalid configuration for key `http.proxy`")
- })).ref0().to_string()))
- }
- None => {},
- }
- }
- None => {}
- }
- match git2::Config::open_default() {
- Ok(cfg) => {
- match cfg.get_str("http.proxy") {
- Ok(s) => return Ok(Some(s.to_string())),
- Err(..) => {}
- }
- }
- Err(..) => {}
- }
- Ok(os::getenv("HTTP_PROXY"))
-}
-
-pub fn upload_login(shell: &mut MultiShell, token: String) -> CargoResult<()> {
- let config = try!(Config::new(shell, None, None));
- let UploadConfig { host, token: _ } = try!(upload_configuration());
- let mut map = HashMap::new();
- let p = os::getcwd();
- match host {
- Some(host) => {
- map.insert("host".to_string(), config::String(host, p.clone()));
- }
- None => {}
- }
- map.insert("token".to_string(), config::String(token, p));
-
- config::set_config(&config, config::Global, "registry", config::Table(map))
-}
pub use self::cargo_clean::{clean, CleanOptions};
-pub use self::cargo_compile::{compile, CompileOptions};
+pub use self::cargo_compile::{compile, compile_pkg, CompileOptions};
pub use self::cargo_read_manifest::{read_manifest,read_package,read_packages};
pub use self::cargo_rustc::{compile_targets, Compilation, Layout, Kind};
pub use self::cargo_rustc::{KindTarget, KindPlugin, Context, LayoutProxy};
pub use self::cargo_generate_lockfile::UpdateOptions;
pub use self::cargo_test::{run_tests, run_benches, TestOptions};
pub use self::cargo_package::package;
-pub use self::cargo_upload::{upload, upload_configuration, UploadConfig};
-pub use self::cargo_upload::{upload_login, http_proxy, http_handle};
+pub use self::registry::{publish, registry_configuration, RegistryConfig};
+pub use self::registry::{registry_login, http_proxy, http_handle};
+pub use self::registry::{modify_owners, yank};
pub use self::cargo_fetch::{fetch, resolve_and_fetch};
pub use self::cargo_pkgid::pkgid;
mod cargo_generate_lockfile;
mod cargo_test;
mod cargo_package;
-mod cargo_upload;
mod cargo_fetch;
mod cargo_pkgid;
+mod registry;
--- /dev/null
+use std::collections::HashMap;
+use std::io::File;
+use std::os;
+
+use curl::http;
+use git2;
+use registry::{Registry, NewCrate, NewCrateDependency};
+
+use core::source::Source;
+use core::{Package, MultiShell, SourceId, RegistryKind};
+use core::manifest::ManifestMetadata;
+use ops;
+use sources::{PathSource, RegistrySource};
+use util::config;
+use util::{CargoResult, human, internal, ChainError, Require, ToUrl};
+use util::config::{Config, Table};
+
+// Registry-related values read from the user's configuration; either field
+// may be absent and overridden by command-line flags.
+pub struct RegistryConfig {
+ // Configured registry index URL, if any.
+ pub index: Option<String>,
+ // Configured API token (typically stored by `cargo login`), if any.
+ pub token: Option<String>,
+}
+
+// Publish the package rooted at `manifest_path`: check its dependencies are
+// publishable, build the tarball (optionally verifying it compiles), and
+// upload it to the registry selected by `index`/configuration.
+//
+// Fix: `&reg_id` had been corrupted into `®_id` (mojibake of the HTML
+// entity for `&reg`), which is not valid Rust; the reference operator is
+// restored below.
+pub fn publish(manifest_path: &Path,
+ shell: &mut MultiShell,
+ token: Option<String>,
+ index: Option<String>,
+ verify: bool) -> CargoResult<()> {
+ let mut src = try!(PathSource::for_path(&manifest_path.dir_path()));
+ try!(src.update());
+ let pkg = try!(src.get_root_package());
+
+ // Resolve the registry handle and its source id, then reject path deps
+ // without versions and deps from foreign registries before packaging.
+ let (mut registry, reg_id) = try!(registry(shell, token, index));
+ try!(verify_dependencies(&pkg, &reg_id));
+
+ // Prepare a tarball
+ let tarball = try!(ops::package(manifest_path, shell, verify));
+
+ // Upload said tarball to the specified destination
+ try!(shell.status("Uploading", pkg.get_package_id().to_string()));
+ try!(transmit(&pkg, &tarball, &mut registry));
+
+ Ok(())
+}
+
+/// Validate that every dependency of `pkg` is publishable: path
+/// dependencies must carry an explicit version requirement, and all
+/// other dependencies must come from `registry_src` itself.
+// NOTE(review): these messages end with "." while the expected stderr in
+// the publish tests omits it — confirm the intended form matches the tests.
+fn verify_dependencies(pkg: &Package, registry_src: &SourceId)
+ -> CargoResult<()> {
+ for dep in pkg.get_dependencies().iter() {
+ if dep.get_source_id().is_path() {
+ if dep.get_specified_req().is_none() {
+ return Err(human(format!("all path dependencies must have \
+ a version specified when being \
+ uploaded to the registry.\n\
+ dependency `{}` does not specify \
+ a version", dep.get_name())))
+ }
+ } else if dep.get_source_id() != registry_src {
+ return Err(human(format!("all dependencies must come from the \
+ same registry.\ndependency `{}` comes \
+ from {} instead", dep.get_name(),
+ dep.get_source_id())))
+ }
+ }
+ Ok(())
+}
+
+/// Assemble the `NewCrate` metadata for `pkg` (including the contents of
+/// its README, if one is configured) and upload it together with
+/// `tarball` via `registry.publish`.
+fn transmit(pkg: &Package, tarball: &Path, registry: &mut Registry)
+ -> CargoResult<()> {
+ let deps = pkg.get_dependencies().iter().map(|dep| {
+ NewCrateDependency {
+ optional: dep.is_optional(),
+ default_features: dep.uses_default_features(),
+ name: dep.get_name().to_string(),
+ features: dep.get_features().to_vec(),
+ version_req: dep.get_version_req().to_string(),
+ target: None, // FIXME: fill this out
+ }
+ }).collect::<Vec<NewCrateDependency>>();
+ let manifest = pkg.get_manifest();
+ let ManifestMetadata {
+ ref authors, ref description, ref homepage, ref documentation,
+ ref keywords, ref readme, ref repository, ref license,
+ } = *manifest.get_metadata();
+ // The manifest stores a README *path*; the registry wants its contents.
+ let readme = match *readme {
+ Some(ref readme) => {
+ let path = pkg.get_root().join(readme.as_slice());
+ Some(try!(File::open(&path).read_to_string().chain_error(|| {
+ human("failed to read the specified README")
+ })))
+ }
+ None => None,
+ };
+ registry.publish(&NewCrate {
+ name: pkg.get_name().to_string(),
+ vers: pkg.get_version().to_string(),
+ deps: deps,
+ features: pkg.get_summary().get_features().clone(),
+ authors: authors.clone(),
+ description: description.clone(),
+ homepage: homepage.clone(),
+ documentation: documentation.clone(),
+ keywords: keywords.clone(),
+ readme: readme,
+ repository: repository.clone(),
+ license: license.clone(),
+ }, tarball).map_err(|e| {
+ human(e.to_string())
+ })
+}
+
+/// Read the `[registry]` table from the user's cargo configuration,
+/// returning `None` for any key (`index`, `token`) that is not set.
+pub fn registry_configuration() -> CargoResult<RegistryConfig> {
+ let configs = try!(config::all_configs(os::getcwd()));
+ // Missing table entirely is fine; a malformed one is an error.
+ let registry = match configs.find_equiv(&"registry") {
+ None => return Ok(RegistryConfig { index: None, token: None }),
+ Some(registry) => try!(registry.table().chain_error(|| {
+ internal("invalid configuration for the key `registry`")
+ })),
+ };
+ let index = match registry.find_equiv(&"index") {
+ None => None,
+ Some(index) => {
+ Some(try!(index.string().chain_error(|| {
+ internal("invalid configuration for key `index`")
+ })).ref0().to_string())
+ }
+ };
+ let token = match registry.find_equiv(&"token") {
+ None => None,
+ Some(token) => {
+ Some(try!(token.string().chain_error(|| {
+ internal("invalid configuration for key `token`")
+ })).ref0().to_string())
+ }
+ };
+ Ok(RegistryConfig { index: index, token: token })
+}
+
+/// Construct a `Registry` client plus the `SourceId` of the index it
+/// talks to.
+///
+/// `token`/`index` arguments win over `.cargo/config` values; the index
+/// finally falls back to the default central registry. The index is
+/// updated first so the API host can be read from the registry's config.
+pub fn registry(shell: &mut MultiShell,
+ token: Option<String>,
+ index: Option<String>) -> CargoResult<(Registry, SourceId)> {
+ // Parse all configuration options
+ let RegistryConfig {
+ token: token_config,
+ index: index_config,
+ } = try!(registry_configuration());
+ let token = try!(token.or(token_config).require(|| {
+ human("no upload token found, please run `cargo login`")
+ }));
+ let index = index.or(index_config).unwrap_or(RegistrySource::default_url());
+ let index = try!(index.as_slice().to_url().map_err(human));
+ let sid = SourceId::new(RegistryKind, index.clone());
+ // Update the index and pull the API host out of its config.json.
+ let api_host = {
+ let mut config = try!(Config::new(shell, None, None));
+ let mut src = RegistrySource::new(&sid, &mut config);
+ try!(src.update().chain_error(|| {
+ human(format!("Failed to update registry {}", index))
+ }));
+ (try!(src.config())).api
+ };
+ let handle = try!(http_handle());
+ Ok((Registry::new_handle(api_host, token, handle), sid))
+}
+
+/// Create a new HTTP handle with appropriate global configuration for cargo.
+///
+/// Currently the only configuration applied is an HTTP proxy, if one is
+/// found by `http_proxy`; everything else is a curl default.
+pub fn http_handle() -> CargoResult<http::Handle> {
+ Ok(match try!(http_proxy()) {
+ Some(proxy) => http::handle().proxy(proxy),
+ None => http::handle(),
+ })
+}
+
+/// Find a globally configured HTTP proxy if one is available.
+///
+/// Favor cargo's `http.proxy`, then git's `http.proxy`, then finally a
+/// HTTP_PROXY env var.
+pub fn http_proxy() -> CargoResult<Option<String>> {
+ let configs = try!(config::all_configs(os::getcwd()));
+ // 1. cargo's own `[http] proxy` configuration key.
+ match configs.find_equiv(&"http") {
+ Some(http) => {
+ let http = try!(http.table().chain_error(|| {
+ internal("invalid configuration for the key `http`")
+ }));
+ match http.find_equiv(&"proxy") {
+ Some(proxy) => {
+ return Ok(Some(try!(proxy.string().chain_error(|| {
+ internal("invalid configuration for key `http.proxy`")
+ })).ref0().to_string()))
+ }
+ None => {},
+ }
+ }
+ None => {}
+ }
+ // 2. git's global `http.proxy` setting; failures to open or read the
+ // git configuration are deliberately ignored (best effort).
+ match git2::Config::open_default() {
+ Ok(cfg) => {
+ match cfg.get_str("http.proxy") {
+ Ok(s) => return Ok(Some(s.to_string())),
+ Err(..) => {}
+ }
+ }
+ Err(..) => {}
+ }
+ // 3. the HTTP_PROXY environment variable.
+ Ok(os::getenv("HTTP_PROXY"))
+}
+
+/// Persist a registry API token (from `cargo login`) into the global
+/// cargo configuration, preserving any already-configured index URL.
+pub fn registry_login(shell: &mut MultiShell, token: String) -> CargoResult<()> {
+ let config = try!(Config::new(shell, None, None));
+ let RegistryConfig { index, token: _ } = try!(registry_configuration());
+ // Rebuild the whole `[registry]` table: keep the existing index (if
+ // any) and overwrite the token.
+ let mut map = HashMap::new();
+ let p = os::getcwd();
+ match index {
+ Some(index) => {
+ map.insert("index".to_string(), config::String(index, p.clone()));
+ }
+ None => {}
+ }
+ map.insert("token".to_string(), config::String(token, p));
+
+ config::set_config(&config, config::Global, "registry", config::Table(map))
+}
+
+/// Add and/or remove owners of a crate on the registry (`cargo owner`).
+///
+/// When `krate` is `None` the crate name is taken from the package at
+/// `manifest_path`. `to_add`/`to_remove` are lists of user names to
+/// grant or revoke ownership for; either may be `None` (no-op).
+pub fn modify_owners(manifest_path: &Path,
+ shell: &mut MultiShell,
+ krate: Option<String>,
+ token: Option<String>,
+ index: Option<String>,
+ to_add: Option<Vec<String>>,
+ to_remove: Option<Vec<String>>) -> CargoResult<()> {
+ let name = match krate {
+ Some(name) => name,
+ None => {
+ // No crate given: default to the package in this directory.
+ let mut src = try!(PathSource::for_path(&manifest_path.dir_path()));
+ try!(src.update());
+ let pkg = try!(src.get_root_package());
+ pkg.get_name().to_string()
+ }
+ };
+
+ let (mut registry, _) = try!(registry(shell, token, index));
+
+ match to_add {
+ Some(v) => {
+ let v = v.iter().map(|s| s.as_slice()).collect::<Vec<_>>();
+ try!(shell.status("Owner", format!("adding `{:#}` to `{}`", v, name)));
+ try!(registry.add_owners(name.as_slice(), v.as_slice()).map_err(|e| {
+ human(format!("failed to add owners: {}", e))
+ }));
+ }
+ None => {}
+ }
+
+ match to_remove {
+ Some(v) => {
+ let v = v.iter().map(|s| s.as_slice()).collect::<Vec<_>>();
+ try!(shell.status("Owner", format!("removing `{:#}` from `{}`",
+ v, name)));
+ // Fixed: this branch previously reported "failed to add owners"
+ // (copy-paste from the add branch above).
+ try!(registry.remove_owners(name.as_slice(), v.as_slice()).map_err(|e| {
+ human(format!("failed to remove owners: {}", e))
+ }));
+ }
+ None => {}
+ }
+
+ Ok(())
+}
+
+/// Yank a published version of a crate — or un-yank it when `undo` is
+/// set — so new projects can no longer (or can again) depend on it.
+///
+/// `krate` defaults to the package at `manifest_path`; `version` is
+/// mandatory and its absence is an error.
+pub fn yank(manifest_path: &Path,
+ shell: &mut MultiShell,
+ krate: Option<String>,
+ version: Option<String>,
+ token: Option<String>,
+ index: Option<String>,
+ undo: bool) -> CargoResult<()> {
+ let name = match krate {
+ Some(name) => name,
+ None => {
+ // No crate given: default to the package in this directory.
+ let mut src = try!(PathSource::for_path(&manifest_path.dir_path()));
+ try!(src.update());
+ let pkg = try!(src.get_root_package());
+ pkg.get_name().to_string()
+ }
+ };
+ let version = match version {
+ Some(v) => v,
+ None => return Err(human("a version must be specified to yank"))
+ };
+
+ let (mut registry, _) = try!(registry(shell, token, index));
+
+ if undo {
+ try!(shell.status("Unyank", format!("{}:{}", name, version)));
+ try!(registry.unyank(name.as_slice(), version.as_slice()).map_err(|e| {
+ human(format!("failed to undo a yank: {}", e))
+ }));
+ } else {
+ try!(shell.status("Yank", format!("{}:{}", name, version)));
+ try!(registry.yank(name.as_slice(), version.as_slice()).map_err(|e| {
+ human(format!("failed to yank: {}", e))
+ }));
+ }
+
+ Ok(())
+}
-use std::io::{mod, fs, File, MemReader};
+use std::io::{mod, fs, File};
use std::io::fs::PathExtensions;
use std::collections::HashMap;
#[deriving(Decodable)]
pub struct RegistryConfig {
pub dl: String,
- pub upload: String,
+ pub api: String,
}
#[deriving(Decodable)]
struct RegistryPackage {
name: String,
vers: String,
- deps: Vec<String>,
+ deps: Vec<RegistryDependency>,
features: HashMap<String, Vec<String>>,
cksum: String,
}
+// A single dependency of a package as encoded (JSON) in a line of the
+// registry index; replaces the old `Vec<String>` encoding.
+#[deriving(Decodable)]
+struct RegistryDependency {
+ name: String,
+ req: String,
+ features: Vec<String>,
+ optional: bool,
+ default_features: bool,
+ target: Option<String>,
+}
+
impl<'a, 'b> RegistrySource<'a, 'b> {
pub fn new(source_id: &SourceId,
config: &'a mut Config<'b>) -> RegistrySource<'a, 'b> {
/// This is the main cargo registry by default, but it can be overridden in
/// a .cargo/config
pub fn url() -> CargoResult<Url> {
- let config = try!(ops::upload_configuration());
- let url = config.host.unwrap_or(CENTRAL.to_string());
+ let config = try!(ops::registry_configuration());
+ let url = config.index.unwrap_or(CENTRAL.to_string());
url.as_slice().to_url().map_err(human)
}
+ /// Get the default url for the registry
+ pub fn default_url() -> String {
+ CENTRAL.to_string()
+ }
+
/// Decode the configuration stored within the registry.
///
/// This requires that the index has been at least checked out.
/// No action is taken if the package is already downloaded.
fn download_package(&mut self, pkg: &PackageId, url: &Url)
-> CargoResult<Path> {
- let dst = self.cache_path.join(url.path().unwrap().last().unwrap()
- .as_slice());
+ // TODO: should discover from the S3 redirect
+ let filename = format!("{}-{}.tar.gz", pkg.get_name(), pkg.get_version());
+ let dst = self.cache_path.join(filename);
if dst.exists() { return Ok(dst) }
try!(self.config.shell().status("Downloading", pkg));
}
};
// TODO: don't download into memory (curl-rust doesn't expose it)
- let resp = try!(handle.get(url.to_string()).exec());
+ let resp = try!(handle.get(url.to_string()).follow_redirects(true).exec());
if resp.get_code() != 200 && resp.get_code() != 0 {
return Err(internal(format!("Failed to get 200 reponse from {}\n{}",
url, resp)))
try!(fs::mkdir_recursive(&dst.dir_path(), io::USER_DIR));
let f = try!(File::open(&tarball));
- let mut gz = try!(GzDecoder::new(f));
- // TODO: don't read into memory (Archive requires Seek)
- let mem = try!(gz.read_to_end());
- let tar = Archive::new(MemReader::new(mem));
- for file in try!(tar.files()) {
- let mut file = try!(file);
- let dst = dst.dir_path().join(file.filename_bytes());
- try!(fs::mkdir_recursive(&dst.dir_path(), io::USER_DIR));
- let mut dst = try!(File::create(&dst));
- try!(io::util::copy(&mut file, &mut dst));
- }
+ let gz = try!(GzDecoder::new(f));
+ let mut tar = Archive::new(gz);
+ try!(tar.unpack(&dst.dir_path()));
try!(File::create(&dst.join(".cargo-ok")));
Ok(dst)
}
/// Parse a line from the registry's index file into a Summary for a
/// package.
fn parse_registry_package(&mut self, line: &str) -> CargoResult<Summary> {
- let pkg = try!(json::decode::<RegistryPackage>(line));
- let pkgid = try!(PackageId::new(pkg.name.as_slice(),
- pkg.vers.as_slice(),
+ let RegistryPackage {
+ name, vers, cksum, deps, features
+ } = try!(json::decode::<RegistryPackage>(line));
+ let pkgid = try!(PackageId::new(name.as_slice(),
+ vers.as_slice(),
&self.source_id));
- let deps: CargoResult<Vec<Dependency>> = pkg.deps.iter().map(|dep| {
- self.parse_registry_dependency(dep.as_slice())
+ let deps: CargoResult<Vec<Dependency>> = deps.into_iter().map(|dep| {
+ self.parse_registry_dependency(dep)
}).collect();
let deps = try!(deps);
- let RegistryPackage { name, vers, cksum, .. } = pkg;
self.hashes.insert((name, vers), cksum);
- Summary::new(pkgid, deps, pkg.features)
+ Summary::new(pkgid, deps, features)
}
- /// Parse a dependency listed in the registry into a `Dependency`.
- ///
- /// Currently the format for dependencies is:
- ///
- /// ```notrust
- /// dep := ['-'] ['*'] name '|' [ name ',' ] * '|' version_req
- /// ```
- ///
- /// The '-' indicates that this is an optional dependency, and the '*'
- /// indicates that the dependency does *not* use the default features
- /// provided. The comma-separate list of names in brackets are the enabled
- /// features for the dependency, and the final element is the version
- /// requirement of the dependency.
- fn parse_registry_dependency(&self, dep: &str) -> CargoResult<Dependency> {
- let mut parts = dep.as_slice().splitn(2, '|');
- let name = parts.next().unwrap();
- let features = try!(parts.next().require(|| {
- human(format!("malformed dependency in registry: {}", dep))
- }));
- let vers = try!(parts.next().require(|| {
- human(format!("malformed dependency in registry: {}", dep))
- }));
- let (name, optional) = if name.starts_with("-") {
- (name.slice_from(1), true)
- } else {
- (name, false)
- };
- let (name, default_features) = if name.starts_with("*") {
- (name.slice_from(1), false)
- } else {
- (name, true)
- };
- let features = features.split(',').filter(|s| !s.is_empty())
- .map(|s| s.to_string()).collect();
- let dep = try!(Dependency::parse(name, Some(vers), &self.source_id));
+ /// Converts an encoded dependency in the registry to a cargo dependency
+ fn parse_registry_dependency(&self, dep: RegistryDependency)
+ -> CargoResult<Dependency> {
+ let RegistryDependency {
+ name, req, features, optional, default_features, target
+ } = dep;
+
+ let dep = try!(Dependency::parse(name.as_slice(), Some(req.as_slice()),
+ &self.source_id));
+ drop(target); // FIXME: pass this in
Ok(dep.optional(optional)
.default_features(default_features)
.features(features))
if self.source_id != *package.get_source_id() { continue }
let mut url = url.clone();
- url.path_mut().unwrap().push("pkg".to_string());
url.path_mut().unwrap().push(package.get_name().to_string());
- url.path_mut().unwrap().push(format!("{}-{}.tar.gz",
- package.get_name(),
- package.get_version()));
+ url.path_mut().unwrap().push(package.get_version().to_string());
+ url.path_mut().unwrap().push("download".to_string());
let path = try!(self.download_package(package, &url).chain_error(|| {
internal(format!("Failed to download package `{}` from {}",
package, url))
use serialize::{Decodable, Decoder};
use core::{SourceId, GitKind};
-use core::manifest::{LibKind, Lib, Dylib, Profile};
+use core::manifest::{LibKind, Lib, Dylib, Profile, ManifestMetadata};
use core::{Summary, Manifest, Target, Dependency, PackageId};
use core::package_id::Metadata;
use util::{CargoResult, Require, human, ToUrl, ToSemver};
pub struct TomlProject {
 name: String,
 version: TomlVersion,
- pub authors: Vec<String>,
+ // No longer `pub`: authors are consumed internally and shipped to the
+ // registry as part of the crate metadata.
+ authors: Vec<String>,
 build: Option<TomlBuildCommandsList>,
 exclude: Option<Vec<String>>,
+
+ // package metadata
+ // Optional registry-facing keys; all may be omitted from the manifest.
+ // `readme` is a path to a file whose contents are uploaded.
+ description: Option<String>,
+ homepage: Option<String>,
+ documentation: Option<String>,
+ readme: Option<String>,
+ keywords: Option<Vec<String>>,
+ license: Option<String>,
+ repository: Option<String>,
}
#[deriving(Decodable)]
let summary = try!(Summary::new(pkgid, deps,
self.features.clone()
.unwrap_or(HashMap::new())));
+ let metadata = ManifestMetadata {
+ description: project.description.clone(),
+ homepage: project.homepage.clone(),
+ documentation: project.documentation.clone(),
+ readme: project.readme.clone(),
+ authors: project.authors.clone(),
+ license: project.license.clone(),
+ repository: project.repository.clone(),
+ keywords: project.keywords.clone().unwrap_or(Vec::new()),
+ };
let mut manifest = Manifest::new(summary,
targets,
layout.root.join("target"),
layout.root.join("doc"),
sources,
build,
- exclude);
+ exclude,
+ metadata);
if used_deprecated_lib {
manifest.add_warning(format!("the [[lib]] section has been \
deprecated in favor of [lib]"));
# Configuration keys related to the registry
[registry]
-host = "..." # URL of the registry (defaults to the central repository)
+index = "..." # URL of the registry index (defaults to the central repository)
token = "..." # Access token (found on the central repo's website)
[http]
--- /dev/null
+[package]
+name = "registry"
+version = "0.0.1-pre"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+
+[lib]
+name = "registry"
+path = "lib.rs"
+
+[dependencies.curl]
+git = "https://github.com/alexcrichton/curl-rust"
+branch = "bundle"
--- /dev/null
+extern crate curl;
+extern crate serialize;
+
+use std::fmt;
+use std::io::{mod, fs, MemReader, MemWriter, File};
+use std::collections::HashMap;
+use std::io::util::ChainedReader;
+use std::result;
+
+use curl::http;
+use serialize::json;
+
+pub struct Registry {
+ host: String,
+ token: String,
+ handle: http::Handle,
+}
+
+pub type Result<T> = result::Result<T, Error>;
+
+/// Errors that can arise when talking to the registry API.
+pub enum Error {
+ // curl-level transport failure.
+ CurlError(curl::ErrCode),
+ // Server replied with an unexpected HTTP status (not 0/200/403).
+ NotOkResponse(http::Response),
+ // Response body could not be decoded as UTF-8.
+ NonUtf8Body,
+ // The registry returned a structured `{"errors": [...]}` payload.
+ ApiErrors(Vec<String>),
+ // Server replied with 403.
+ Unauthorized,
+ IoError(io::IoError),
+}
+
+/// JSON metadata payload sent to the registry in a publish request
+/// (serialized ahead of the crate tarball in the request body).
+#[deriving(Encodable)]
+pub struct NewCrate {
+ pub name: String,
+ pub vers: String,
+ pub deps: Vec<NewCrateDependency>,
+ pub features: HashMap<String, Vec<String>>,
+ pub authors: Vec<String>,
+ pub description: Option<String>,
+ pub documentation: Option<String>,
+ pub homepage: Option<String>,
+ // Contents of the README file, not its path.
+ pub readme: Option<String>,
+ pub keywords: Vec<String>,
+ pub license: Option<String>,
+ pub repository: Option<String>,
+}
+
+#[deriving(Encodable)]
+pub struct NewCrateDependency {
+ pub optional: bool,
+ pub default_features: bool,
+ pub name: String,
+ pub features: Vec<String>,
+ pub version_req: String,
+ pub target: Option<String>,
+}
+
+#[deriving(Decodable)] struct R { ok: bool }
+#[deriving(Decodable)] struct ApiErrorList { errors: Vec<ApiError> }
+#[deriving(Decodable)] struct ApiError { detail: String }
+#[deriving(Encodable)] struct OwnersReq<'a> { users: &'a [&'a str] }
+
+impl Registry {
+ /// Create a client for the registry at `host` with a fresh curl handle.
+ pub fn new(host: String, token: String) -> Registry {
+ Registry::new_handle(host, token, http::Handle::new())
+ }
+
+ /// Create a client reusing a pre-configured curl handle (e.g. one
+ /// that already has proxy settings applied).
+ pub fn new_handle(host: String, token: String,
+ handle: http::Handle) -> Registry {
+ Registry {
+ host: host,
+ token: token,
+ handle: handle,
+ }
+ }
+
+ /// Grant ownership of `krate` to each named user.
+ // Panics if the server replies with `ok: false` or a malformed body.
+ pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> {
+ let body = json::encode(&OwnersReq { users: owners });
+ let body = try!(self.put(format!("/crates/{}/owners", krate),
+ body.as_bytes()));
+ assert!(json::decode::<R>(body.as_slice()).unwrap().ok);
+ Ok(())
+ }
+
+ /// Revoke ownership of `krate` from each named user.
+ pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> {
+ let body = json::encode(&OwnersReq { users: owners });
+ let body = try!(self.delete(format!("/crates/{}/owners", krate),
+ Some(body.as_bytes())));
+ assert!(json::decode::<R>(body.as_slice()).unwrap().ok);
+ Ok(())
+ }
+
+ /// Upload a new crate version: length-prefixed JSON metadata followed
+ /// by the length-prefixed tarball, in one PUT body.
+ pub fn publish(&mut self, krate: &NewCrate, tarball: &Path) -> Result<()> {
+ let json = json::encode(krate);
+ // Prepare the body. The format of the upload request is:
+ //
+ // <le u32 of json>
+ // <json request> (metadata for the package)
+ // <le u32 of tarball>
+ // <source tarball>
+ let stat = try!(fs::stat(tarball).map_err(IoError));
+ let header = {
+ let mut w = MemWriter::new();
+ w.write_le_u32(json.len() as u32).unwrap();
+ w.write_str(json.as_slice()).unwrap();
+ w.write_le_u32(stat.size as u32).unwrap();
+ MemReader::new(w.unwrap())
+ };
+ // Chain header + tarball so the file is streamed, not slurped.
+ let tarball = try!(File::open(tarball).map_err(IoError));
+ let size = stat.size as uint + header.get_ref().len();
+ let mut body = ChainedReader::new(vec![box header as Box<Reader>,
+ box tarball as Box<Reader>].into_iter());
+
+ let url = format!("{}/api/v1/crates/new", self.host);
+ let response = handle(self.handle.put(url, &mut body)
+ .content_length(size)
+ .header("Authorization",
+ self.token.as_slice())
+ .header("Accept", "application/json")
+ .exec());
+ let _body = try!(response);
+ Ok(())
+ }
+
+ /// Yank `version` of `krate` on the registry.
+ pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> {
+ let body = try!(self.delete(format!("/crates/{}/{}/yank", krate, version),
+ None));
+ assert!(json::decode::<R>(body.as_slice()).unwrap().ok);
+ Ok(())
+ }
+
+ /// Undo a yank of `version` of `krate`.
+ pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> {
+ let body = try!(self.put(format!("/crates/{}/{}/unyank", krate, version),
+ []));
+ assert!(json::decode::<R>(body.as_slice()).unwrap().ok);
+ Ok(())
+ }
+
+ // Authenticated PUT of `b` to `{host}/api/v1{path}`, returning the body.
+ fn put(&mut self, path: String, b: &[u8]) -> Result<String> {
+ handle(self.handle.put(format!("{}/api/v1{}", self.host, path), b)
+ .header("Authorization", self.token.as_slice())
+ .header("Accept", "application/json")
+ .content_type("application/json")
+ .exec())
+ }
+
+ // Authenticated DELETE of `{host}/api/v1{path}` with an optional body.
+ fn delete(&mut self, path: String, b: Option<&[u8]>) -> Result<String> {
+ let mut req = self.handle.delete(format!("{}/api/v1{}", self.host, path))
+ .header("Authorization", self.token.as_slice())
+ .header("Accept", "application/json")
+ .content_type("application/json");
+ match b {
+ Some(b) => req = req.body(b),
+ None => {}
+ }
+ handle(req.exec())
+ }
+}
+
+/// Convert a raw curl response into `Result<String>`: map transport
+/// errors, reject unexpected status codes, require UTF-8, and surface
+/// any JSON-encoded `{"errors": [...]}` payload as `ApiErrors`.
+fn handle(response: result::Result<http::Response, curl::ErrCode>)
+ -> Result<String> {
+ let response = try!(response.map_err(CurlError));
+ match response.get_code() {
+ 0 => {} // file upload url sometimes
+ 200 => {}
+ 403 => return Err(Unauthorized),
+ _ => return Err(NotOkResponse(response))
+ }
+
+ let body = match String::from_utf8(response.move_body()) {
+ Ok(body) => body,
+ Err(..) => return Err(NonUtf8Body),
+ };
+ // Even a 200 can carry an error list in its body; check for one
+ // before handing the body back to the caller.
+ match json::decode::<ApiErrorList>(body.as_slice()) {
+ Ok(errors) => {
+ return Err(ApiErrors(errors.errors.into_iter().map(|s| s.detail)
+ .collect()))
+ }
+ Err(..) => {}
+ }
+ Ok(body)
+}
+
+/// Human-readable rendering of registry API errors.
+impl fmt::Show for Error {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match *self {
+ // Fixed typo in the user-facing message: "reponse" -> "response".
+ NonUtf8Body => write!(f, "response body was not utf-8"),
+ CurlError(ref err) => write!(f, "http error: {}", err),
+ NotOkResponse(ref resp) => {
+ write!(f, "failed to get a 200 OK response: {}", resp)
+ }
+ ApiErrors(ref errs) => {
+ write!(f, "api errors: {}", errs.connect(", "))
+ }
+ Unauthorized => write!(f, "unauthorized API access"),
+ IoError(ref e) => write!(f, "io error: {}", e),
+ }
+ }
+}
pub static PACKAGING: &'static str = " Packaging";
pub static DOWNLOADING: &'static str = " Downloading";
pub static UPLOADING: &'static str = " Uploading";
+pub static VERIFYING: &'static str = " Verifying";
use flate2::reader::GzDecoder;
use support::{project, execs, cargo_dir, ResultTest};
-use support::{PACKAGING};
+use support::{PACKAGING, VERIFYING, COMPILING};
use hamcrest::{assert_that, existing_file};
fn setup() {
assert_that(p.cargo_process("package"),
execs().with_status(0).with_stdout(format!("\
{packaging} foo v0.0.1 ({dir})
+{verifying} foo v0.0.1 ({dir})
+{compiling} foo v0.0.1 ({dir}[..])
",
packaging = PACKAGING,
+ verifying = VERIFYING,
+ compiling = COMPILING,
dir = p.url()).as_slice()));
assert_that(&p.root().join("foo-0.0.1.tar.gz"), existing_file());
assert_that(p.process(cargo_dir().join("cargo")).arg("package"),
--- /dev/null
+use std::io::{mod, fs, File, MemReader};
+
+use flate2::reader::GzDecoder;
+use tar::Archive;
+use url::Url;
+
+use support::{ResultTest, project, execs};
+use support::{UPDATING, PACKAGING, UPLOADING};
+use support::paths;
+use support::git::repo;
+
+use hamcrest::assert_that;
+
+// Test fixtures: "registry" is the fake on-disk index repository and
+// "upload" the fake API/download endpoint, both addressed as file:// URLs.
+fn registry_path() -> Path { paths::root().join("registry") }
+fn registry() -> Url { Url::from_file_path(&registry_path()).unwrap() }
+fn upload_path() -> Path { paths::root().join("upload") }
+fn upload() -> Url { Url::from_file_path(&upload_path()).unwrap() }
+
+/// Per-test setup: write a `.cargo/config` pointing at the fake
+/// registry, create the fake upload endpoint directory, and initialize
+/// the index repo with a `config.json` whose `dl`/`api` both point at it.
+fn setup() {
+ let config = paths::root().join(".cargo/config");
+ fs::mkdir_recursive(&config.dir_path(), io::USER_DIR).assert();
+ File::create(&config).write_str(format!(r#"
+ [registry]
+ index = "{reg}"
+ token = "api-token"
+ "#, reg = registry()).as_slice()).assert();
+ fs::mkdir_recursive(&upload_path().join("api/v1/crates"), io::USER_DIR).assert();
+
+ repo(&registry_path())
+ .file("config.json", format!(r#"{{
+ "dl": "{0}",
+ "api": "{0}"
+ }}"#, upload()))
+ .build();
+}
+
+// `cargo publish --no-verify` uploads a tarball whose contents are
+// exactly the manifest and sources, preceded by the JSON metadata frame.
+test!(simple {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ "#)
+ .file("src/main.rs", "fn main() {}");
+
+ assert_that(p.cargo_process("publish").arg("--no-verify"),
+ execs().with_status(0).with_stdout(format!("\
+{updating} registry `{reg}`
+{packaging} foo v0.0.1 ({dir})
+{uploading} foo v0.0.1 ({dir})
+",
+ updating = UPDATING,
+ uploading = UPLOADING,
+ packaging = PACKAGING,
+ dir = p.url(),
+ reg = registry()).as_slice()));
+
+ let mut f = File::open(&upload_path().join("api/v1/crates/new")).unwrap();
+ // Skip the metadata payload and the size of the tarball
+ let sz = f.read_le_u32().unwrap();
+ f.seek(sz as i64 + 4, io::SeekCur).unwrap();
+
+ // Verify the tarball
+ let mut rdr = GzDecoder::new(f).unwrap();
+ assert_eq!(rdr.header().filename(), Some(b"foo-0.0.1.tar.gz"));
+ let inner = MemReader::new(rdr.read_to_end().unwrap());
+ let ar = Archive::new(inner);
+ for file in ar.files().unwrap() {
+ let file = file.unwrap();
+ let fname = file.filename_bytes();
+ assert!(fname == Path::new("foo-0.0.1/Cargo.toml").as_vec() ||
+ fname == Path::new("foo-0.0.1/src/main.rs").as_vec(),
+ "unexpected filename: {}", file.filename())
+ }
+})
+
+// Publishing a crate with a git dependency is rejected: every dependency
+// must come from the registry itself (see `verify_dependencies`).
+test!(git_deps {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.foo]
+ git = "git://path/to/nowhere"
+ "#)
+ .file("src/main.rs", "fn main() {}");
+
+ assert_that(p.cargo_process("publish").arg("-v").arg("--no-verify"),
+ execs().with_status(101).with_stderr("\
+all dependencies must come from the same registry
+dependency `foo` comes from git://path/to/nowhere instead
+"));
+})
+
+// A path dependency without an explicit version cannot be published
+// (the registry would have no version requirement to record).
+test!(path_dependency_no_version {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ path = "bar"
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ "#)
+ .file("bar/src/lib.rs", "");
+
+ assert_that(p.cargo_process("publish"),
+ execs().with_status(101).with_stderr("\
+all path dependencies must have a version specified when being uploaded \
+to the registry
+dependency `bar` does not specify a version
+"));
+})
use serialize::hex::ToHex;
use support::{ResultTest, project, execs, cargo_dir};
-use support::{UPDATING, DOWNLOADING, COMPILING};
+use support::{UPDATING, DOWNLOADING, COMPILING, PACKAGING, VERIFYING};
use support::paths;
use support::git::repo;
use cargo::util::Sha256;
fs::mkdir_recursive(&config.dir_path(), io::USER_DIR).assert();
File::create(&config).write_str(format!(r#"
[registry]
- host = "{reg}"
+ index = "{reg}"
token = "api-token"
"#, reg = registry()).as_slice()).assert();
let foo = include_bin!("fixtures/foo-0.0.1.tar.gz");
let bar = include_bin!("fixtures/bar-0.0.1.tar.gz");
let notyet = include_bin!("fixtures/notyet-0.0.1.tar.gz");
- let foo_cksum = dl("pkg/foo/foo-0.0.1.tar.gz", foo);
- let bar_cksum = dl("pkg/bar/bar-0.0.1.tar.gz", bar);
- dl("pkg/bad-cksum/bad-cksum-0.0.1.tar.gz", foo);
- let notyet = dl("pkg/notyet/notyet-0.0.1.tar.gz", notyet);
+ let foo_cksum = dl("foo", "0.0.1", foo);
+ let bar_cksum = dl("bar", "0.0.1", bar);
+ dl("bad-cksum", "0.0.1", foo);
+ let notyet = dl("notyet", "0.0.1", notyet);
// Init a new registry
repo(®istry_path())
.file("config.json", format!(r#"
- {{"dl":"{}","upload":""}}
+ {{"dl":"{}","api":""}}
"#, dl_url()).as_slice())
.file("3/f/foo", pkg("foo", "0.0.1", [], &foo_cksum))
- .file("3/b/bar", pkg("bar", "0.0.1", ["foo||>=0.0.0"], &bar_cksum))
+ .file("3/b/bar", pkg("bar", "0.0.1", [
+ "{\"name\":\"foo\",\
+ \"req\":\">=0.0.0\",\
+ \"features\":[],\
+ \"default_features\":false,\
+ \"target\":null,\
+ \"optional\":false}"
+ ], &bar_cksum))
.file("ba/d-/bad-cksum", pkg("bad-cksum", "0.0.1", [], &bar_cksum))
.nocommit_file("no/ty/notyet", pkg("notyet", "0.0.1", [], ¬yet))
.build();
fn pkg(name: &str, vers: &str, deps: &[&str], cksum: &String) -> String {
- let deps: Vec<String> = deps.iter().map(|s| {
- format!("\"{}\"", s)
- }).collect();
- let deps = deps.connect(",");
-
- format!(r#"{{"name":"{}","vers":"{}","deps":[{}],"cksum":"{}","features":{{}}}}"#,
+ format!(r#"{{"name":"{}","vers":"{}","deps":{},"cksum":"{}","features":{{}}}}"#,
name, vers, deps, cksum)
}
- fn dl(path: &str, contents: &[u8]) -> String {
- let dst = dl_path().join(path);
+ fn dl(name: &str, vers: &str, contents: &[u8]) -> String {
+ let dst = dl_path().join(name).join(vers).join("download");
fs::mkdir_recursive(&dst.dir_path(), io::USER_DIR).assert();
File::create(&dst).write(contents).unwrap();
cksum(contents)
}
}
+/// Commit the pre-staged `notyet` index entry to the fake registry's git
+/// repository, making the package visible to cargo — simulating the
+/// package being published after the test started.
+fn publish_notyet() {
+ let repo = git2::Repository::open(&registry_path()).unwrap();
+ let mut index = repo.index().unwrap();
+ index.add_path(&Path::new("no/ty/notyet")).unwrap();
+ let id = index.write_tree().unwrap();
+ let tree = repo.find_tree(id).unwrap();
+ let sig = repo.signature().unwrap();
+ let parent = repo.refname_to_id("refs/heads/master").unwrap();
+ let parent = repo.find_commit(parent).unwrap();
+ repo.commit(Some("HEAD"), &sig, &sig,
+ "Another commit", &tree,
+ [&parent]).unwrap();
+}
+
test!(simple {
let p = project("foo")
.file("Cargo.toml", r#"
version required: >= 0.0.0
"));
- // Add the package and commit
- let repo = git2::Repository::open(&registry_path()).unwrap();
- let mut index = repo.index().unwrap();
- index.add_path(&Path::new("no/ty/notyet")).unwrap();
- let id = index.write_tree().unwrap();
- let tree = repo.find_tree(id).unwrap();
- let sig = repo.signature().unwrap();
- let parent = repo.refname_to_id("refs/heads/master").unwrap();
- let parent = repo.find_commit(parent).unwrap();
- repo.commit(Some("HEAD"), &sig, &sig,
- "Another commit", &tree,
- [&parent]).unwrap();
+ publish_notyet();
assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
execs().with_status(0).with_stdout(format!("\
dir = p.url(),
reg = registry()).as_slice()));
})
+
+test!(package_with_path_deps {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.notyet]
+ version = "0.0.1"
+ path = "notyet"
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("notyet/Cargo.toml", r#"
+ [package]
+ name = "notyet"
+ version = "0.0.1"
+ authors = []
+ "#)
+ .file("notyet/src/lib.rs", "");
+ p.build();
+
+ assert_that(p.process(cargo_dir().join("cargo")).arg("package").arg("-v"),
+ execs().with_status(101).with_stderr("\
+failed to verify package tarball
+
+Caused by:
+ no package named `notyet` found (required by `foo`)
+location searched: the package registry
+version required: ^0.0.1
+"));
+
+ publish_notyet();
+
+ assert_that(p.process(cargo_dir().join("cargo")).arg("package"),
+ execs().with_status(0).with_stdout(format!("\
+{packaging} foo v0.0.1 ({dir})
+{verifying} foo v0.0.1 ({dir})
+{updating} registry `[..]`
+{downloading} notyet v0.0.1 (the package registry)
+{compiling} notyet v0.0.1 (the package registry)
+{compiling} foo v0.0.1 ({dir})
+",
+ packaging = PACKAGING,
+ verifying = VERIFYING,
+ updating = UPDATING,
+ downloading = DOWNLOADING,
+ compiling = COMPILING,
+ dir = p.url(),
+)));
+})
+++ /dev/null
-use std::io::{mod, fs, File, MemReader};
-
-use flate2::reader::GzDecoder;
-use tar::Archive;
-use url::Url;
-
-use support::{ResultTest, project, execs};
-use support::{UPDATING, PACKAGING, UPLOADING};
-use support::paths;
-use support::git::repo;
-
-use hamcrest::assert_that;
-
-fn registry_path() -> Path { paths::root().join("registry") }
-fn registry() -> Url { Url::from_file_path(&registry_path()).unwrap() }
-fn upload_path() -> Path { paths::root().join("upload") }
-fn upload() -> Url { Url::from_file_path(&upload_path()).unwrap() }
-
-fn setup() {
- let config = paths::root().join(".cargo/config");
- fs::mkdir_recursive(&config.dir_path(), io::USER_DIR).assert();
- File::create(&config).write_str(format!(r#"
- [registry]
- host = "{reg}"
- token = "api-token"
- "#, reg = registry()).as_slice()).assert();
-
- repo(&registry_path())
- .file("config.json", format!(r#"{{
- "dl": "",
- "upload": "{}"
- }}"#, upload()))
- .build();
-}
-
-test!(simple {
- let p = project("foo")
- .file("Cargo.toml", r#"
- [project]
- name = "foo"
- version = "0.0.1"
- authors = []
- "#)
- .file("src/main.rs", "fn main() {}");
-
- assert_that(p.cargo_process("upload"),
- execs().with_status(0).with_stdout(format!("\
-{updating} registry `{reg}`
-{packaging} foo v0.0.1 ({dir})
-{uploading} foo v0.0.1 ({dir})
-",
- updating = UPDATING,
- uploading = UPLOADING,
- packaging = PACKAGING,
- dir = p.url(),
- reg = registry()).as_slice()));
-
- let mut rdr = GzDecoder::new(File::open(&upload_path()).unwrap()).unwrap();
- assert_eq!(rdr.header().filename(), Some(b"foo-0.0.1.tar.gz"));
- let inner = MemReader::new(rdr.read_to_end().unwrap());
- let ar = Archive::new(inner);
- for file in ar.files().unwrap() {
- let file = file.unwrap();
- let fname = file.filename_bytes();
- assert!(fname == Path::new("foo-0.0.1/Cargo.toml").as_vec() ||
- fname == Path::new("foo-0.0.1/src/main.rs").as_vec(),
- "unexpected filename: {}", file.filename())
- }
-})
-
-test!(git_deps {
- let p = project("foo")
- .file("Cargo.toml", r#"
- [project]
- name = "foo"
- version = "0.0.1"
- authors = []
-
- [dependencies.foo]
- git = "git://path/to/nowhere"
- "#)
- .file("src/main.rs", "fn main() {}");
-
- assert_that(p.cargo_process("upload").arg("-v"),
- execs().with_status(101).with_stderr("\
-failed to upload package to registry: [..]
-
-Caused by:
- All dependencies must come from the same registry.
-Dependency `foo` comes from git://path/to/nowhere instead
-"));
-})
mod test_cargo_package;
mod test_cargo_build_auth;
mod test_cargo_registry;
-mod test_cargo_upload;
+mod test_cargo_publish;
mod test_cargo_fetch;